1、OpenGL生成纹理
2、纹理绑定到SurfaceTexture上
3、用SurfaceTexture做参数创建Surface
4、MediaCodec解码的视频就往Surface发送,就显示出画面了
- Shader编写
vertex_shader.glsl:
// Pass-through vertex shader for full-screen video rendering.
// av_Position: vertex position, already in normalized device coordinates.
// af_Position: texture coordinate for this vertex, forwarded to the fragment shader.
attribute vec4 av_Position;
attribute vec2 af_Position;
// Interpolated texture coordinate consumed by the fragment shader's sampler.
varying vec2 v_texPosition;
void main() {
v_texPosition = af_Position;
gl_Position = av_Position;
}
fragment_mediacodec.glsl: (shader source was not captured in this extract — it should sample `sTexture`, a `samplerExternalOES`, at `v_texPosition`)
VideoRender.java:
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class VideoRender implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
private Context context;
private final float[] vertexData = {
-1f, -1f,
1f, -1f,
-1f, 1f,
1f, 1f
};
private final float[] textureData = {
0f, 1f,
1f, 1f,
0f, 0f,
1f, 0f
};
private FloatBuffer vertexBuffer;
private FloatBuffer textureBuffer;
//mediacodec
private int program_mediacodec;
private int avPosition_mediacodec;
private int afPosition_mediacodec;
private int samplerOES_mediacodec;
private int textureId_mediacodec;
private SurfaceTexture surfaceTexture;
private Surface surface;
private OnSurfaceCreateListener onSurfaceCreateListener;
private OnRenderListener onRenderListener;
public VideoRender(Context context) {
this.context = context;
vertexBuffer = ByteBuffer.allocateDirect(vertexData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(vertexData);
vertexBuffer.position(0);
textureBuffer = ByteBuffer.allocateDirect(textureData.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer()
.put(textureData);
textureBuffer.position(0);
}
public void setOnSurfaceCreateListener(OnSurfaceCreateListener onSurfaceCreateListener) {
this.onSurfaceCreateListener = onSurfaceCreateListener;
}
public void setOnRenderListener(OnRenderListener onRenderListener) {
this.onRenderListener = onRenderListener;
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
initRenderMediacodec();
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
}
public void onDrawFrame(GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
renderMediacodec();
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
}
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
if (onRenderListener != null) {
////将onFrameAvailable函数回掉到GLSurfaceView调用requestRender()触发onDrawFrame()
onRenderListener.onRender();
}
}
private void initRenderMediacodec() {
String vertexSource = ShaderUtil.readRawTxt(context, R.raw.vertex_shader);
String fragmentSource = ShaderUtil.readRawTxt(context, R.raw.fragment_mediacodec);
program_mediacodec = ShaderUtil.createProgram(vertexSource, fragmentSource);
avPosition_mediacodec = GLES20.glGetAttribLocation(program_mediacodec, "av_Position");
afPosition_mediacodec = GLES20.glGetAttribLocation(program_mediacodec, "af_Position");
samplerOES_mediacodec = GLES20.glGetUniformLocation(program_mediacodec, "sTexture");
int[] textureids = new int[1];
GLES20.glGenTextures(1, textureids, 0);
textureId_mediacodec = textureids[0];
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
surfaceTexture = new SurfaceTexture(textureId_mediacodec);
surface = new Surface(surfaceTexture);
surfaceTexture.setOnFrameAvailableListener(this);
if (onSurfaceCreateListener != null) {
//将Surface回掉出去给MediaCodec绑定渲染
onSurfaceCreateListener.onSurfaceCreate(surface);
}
}
private void renderMediacodec() {
surfaceTexture.updateTexImage();
GLES20.glUseProgram(program_mediacodec);
GLES20.glEnableVertexAttribArray(avPosition_mediacodec);
GLES20.glVertexAttribPointer(avPosition_mediacodec, 2, GLES20.GL_FLOAT, false, 8, vertexBuffer);
GLES20.glEnableVertexAttribArray(afPosition_mediacodec);
GLES20.glVertexAttribPointer(afPosition_mediacodec, 2, GLES20.GL_FLOAT, false, 8, textureBuffer);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId_mediacodec);
GLES20.glUniform1i(samplerOES_mediacodec, 0);
}
public interface OnSurfaceCreateListener {
void onSurfaceCreate(Surface surface);
}
public interface OnRenderListener {
void onRender();
}
}
VideoGLSurfaceView.java:
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
public class VideoGLSurfaceView extends GLSurfaceView {
private VideoRender render;
public VideoGLSurfaceView(Context context) {
this(context, null);
}
public VideoGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
setEGLContextClientVersion(2);
render = new VideoRender(context);
setRenderer(render);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
render.setOnRenderListener(new VideoRender.OnRenderListener() {
public void onRender() {
requestRender();
}
});
}
public VideoRender getWlRender() {
return render;
}
}